Language Modeling 2: RNN Model

with Giovanni
build
def build_model(vocab_size, embedding_dim, rnn_units):
    """Build a character-level RNN language model.

    Architecture: Embedding -> LSTM (full sequence) -> Dense logits,
    one logit vector of size `vocab_size` per time step.

    Args:
        vocab_size: number of distinct characters (embedding input dim
            and output logit dim).
        embedding_dim: size of each character embedding vector.
        rnn_units: number of hidden units in the LSTM layer.

    Returns:
        An uncompiled tf.keras.Sequential model.
    """
    layers = [
        tf.keras.layers.Embedding(vocab_size, embedding_dim),
        # return_sequences=True: emit an output at every time step,
        # not just the last one, so the Dense head scores each position.
        tf.keras.layers.LSTM(rnn_units, return_sequences=True),
        # Raw logits (no softmax); pair with from_logits=True at loss time.
        tf.keras.layers.Dense(vocab_size),
    ]
    return tf.keras.Sequential(layers)
# Fix the RNG seed first so weight initialization is reproducible.
tf.random.set_seed(1)

# Model hyperparameters.
charset_size = len(char_array)  # vocabulary = distinct characters in the corpus
embedding_dim = 256
rnn_units = 512

model = build_model(
    vocab_size=charset_size,
    embedding_dim=embedding_dim,
    rnn_units=rnn_units,
)
model.summary()
Model: "sequential"
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
embedding (Embedding)        (None, None, 256)         20480
_________________________________________________________________
lstm (LSTM)                  (None, None, 512)         1574912
_________________________________________________________________
dense (Dense)                (None, None, 80)          41040
=================================================================
Total params: 1,636,432
Trainable params: 1,636,432
Non-trainable params: 0
_________________________________________________________________
LSTM은 랭크 3 크기의 (None, None, 512)를 출력한다
첫 번째 차원은 batch, 두 번째 차원은 출력 시퀀스 길이(40), 세 번째 차원은 은닉 유닛의 개수에 해당

최종 출력 또한 (None, None, 80)으로 랭크 3의 텐서가 출력된다.
compile & fit
# The Dense head outputs raw logits, so the loss must apply the
# softmax itself (from_logits=True). Targets are integer char IDs,
# hence the sparse variant of categorical cross-entropy.
loss_fn = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
model.compile(optimizer='adam', loss=loss_fn)

# NOTE(review): `ds` is built elsewhere — presumably (input, target)
# character-sequence pairs; confirm against the dataset pipeline.
model.fit(ds, epochs=20)